46b2fcdef0b0fa9d741951cbb6f9c97b53d09be4,ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/cache/TimelineMetricsCache.java,TimelineMetricWrapper,putMetric,#TimelineMetric#,77
Before Change
/**
 * Merges the values of {@code metric} into the cached {@code timelineMetric}.
 *
 * If the cache already holds more than {@code maxRecsPerName} values, evicts
 * everything older than {@code oldestTimestamp + maxEvictionTimeInMillis}
 * before merging, advancing the cached metric's start time accordingly.
 * Synchronized: guards the shared {@code timelineMetric} and timestamp fields.
 *
 * @param metric incoming metric whose values are appended to the cache
 */
public synchronized void putMetric(TimelineMetric metric) {
  TreeMap<Long, Double> cachedValues = this.timelineMetric.getMetricValues();
  if (cachedValues.size() > maxRecsPerName) {
    // Overflow: keep only values at or after the eviction cutoff.
    long evictionCutoff = oldestTimestamp + maxEvictionTimeInMillis;
    TreeMap<Long, Double> retainedValues =
        new TreeMap<>(cachedValues.tailMap(evictionCutoff));
    // If nothing survives eviction, fall back to the incoming metric's start
    // time; otherwise the earliest retained key becomes the new start time.
    Long newStartTime = retainedValues.isEmpty()
        ? metric.getStartTime() : retainedValues.firstKey();
    oldestTimestamp = newStartTime;
    this.timelineMetric.setStartTime(newStartTime);
    this.timelineMetric.setMetricValues(retainedValues);
    LOG.warn("Metrics cache overflow. Values for metric " +
        metric.getMetricName() + " older than " + evictionCutoff +
        " were removed to clean up the cache.");
  }
  this.timelineMetric.addMetricValues(metric.getMetricValues());
  updateTimeDiff(metric.getStartTime());
}
public synchronized long getTimeDiff() {
After Change
putMetric(timelineMetric);
}
/**
 * Copies every data point of {@code metric} into {@code dataPointsCache} and
 * widens the tracked [oldestTimeStamp, newestTimeStamp] window to include the
 * metric's start time.
 * Synchronized: guards the shared cache and the timestamp bounds.
 *
 * @param metric incoming metric whose data points are added to the cache
 */
public synchronized void putMetric(TimelineMetric metric) {
  long startTime = metric.getStartTime();
  if (dataPointsCache.size() == 0) {
    // First metric seen: seed both bounds from its start time so the
    // min/max updates below start from a valid baseline.
    oldestTimeStamp = startTime;
    newestTimeStamp = startTime;
  }
  // NOTE(review): assumes dataPointsCache keys are data-point timestamps —
  // confirm against the field's declaration (not visible in this chunk).
  for (Map.Entry<Long, Double> dataPoint : metric.getMetricValues().entrySet()) {
    dataPointsCache.put(dataPoint.getKey(), dataPoint.getValue());
  }
  oldestTimeStamp = Math.min(oldestTimeStamp, startTime);
  newestTimeStamp = Math.max(newestTimeStamp, startTime);
}